1use crate::{
4 Allocation, SizedTypeProperties, align_up, assert_unsafe_precondition, page_size,
5 vec::{
6 GrowthStrategy, TryReserveError,
7 TryReserveErrorKind::{AllocError, CapacityOverflow},
8 capacity_overflow, handle_error,
9 },
10};
11use core::{
12 alloc::Layout,
13 borrow::{Borrow, BorrowMut},
14 cell::UnsafeCell,
15 cmp, fmt,
16 hash::{Hash, Hasher},
17 iter::FusedIterator,
18 marker::PhantomData,
19 mem::{ManuallyDrop, MaybeUninit},
20 ops::{Deref, DerefMut, Index, IndexMut},
21 panic::RefUnwindSafe,
22 ptr,
23 slice::{self, SliceIndex},
24 sync::atomic::{
25 AtomicBool, AtomicUsize,
26 Ordering::{Acquire, Relaxed, Release},
27 },
28};
29
30pub mod raw;
31
#[derive(Clone, Copy, Debug)]
pub struct VecBuilder {
    // Hard upper bound on the number of elements the built vector may ever
    // hold; the full address-space reservation is sized from this.
    max_capacity: usize,
    // Number of elements' worth of memory to commit up front (defaults to 0).
    capacity: usize,
    // Policy deciding how much extra capacity to commit when the vector grows.
    growth_strategy: GrowthStrategy,
}
43
impl VecBuilder {
    /// Creates a builder for a vector bounded by `max_capacity` elements.
    #[inline]
    const fn new(max_capacity: usize) -> Self {
        VecBuilder {
            max_capacity,
            capacity: 0,
            growth_strategy: GrowthStrategy::new(),
        }
    }

    /// Sets the capacity (in elements) to commit up front when building.
    #[inline]
    pub const fn capacity(&mut self, capacity: usize) -> &mut Self {
        self.capacity = capacity;

        self
    }

    /// Sets the growth strategy used when the vector runs out of committed
    /// capacity.
    ///
    /// # Panics
    ///
    /// Panics if `growth_strategy` fails its own validation.
    #[inline]
    #[track_caller]
    pub const fn growth_strategy(&mut self, growth_strategy: GrowthStrategy) -> &mut Self {
        // Validate eagerly so misconfiguration is reported at the call site.
        growth_strategy.validate();

        self.growth_strategy = growth_strategy;

        self
    }

    /// Builds the vector, panicking on reservation/commit failure.
    #[must_use]
    #[track_caller]
    pub fn build<T>(&self) -> Vec<T> {
        match self.try_build() {
            Ok(vec) => vec,
            Err(err) => handle_error(err),
        }
    }

    /// Builds the vector, returning an error instead of panicking.
    ///
    /// # Errors
    ///
    /// Returns `CapacityOverflow` if the requested sizes are inconsistent or
    /// too large, or `AllocError` if reserving/committing memory fails.
    pub fn try_build<T>(&self) -> Result<Vec<T>, TryReserveError> {
        Ok(Vec {
            inner: RawVec {
                // SAFETY: no header is used (`Layout::new::<()>()`) and the
                // element size/align come from `T` itself — TODO(review):
                // confirm against `RawVecInner::new`'s exact contract.
                inner: unsafe {
                    RawVecInner::new(
                        self.max_capacity,
                        self.capacity,
                        self.growth_strategy,
                        Layout::new::<()>(),
                        size_of::<T>(),
                        align_of::<T>(),
                    )
                }?,
                marker: PhantomData,
            },
        })
    }
}
126
pub struct Vec<T> {
    // Backing storage. Every element lives in a `Slot<T>`, which pairs the
    // value with an `occupied` flag so concurrent readers can tell whether
    // the value has been fully written yet.
    inner: RawVec<Slot<T>>,
}

// SAFETY: sending the vector transfers ownership of all contained `T`s, so
// `T: Send` is sufficient.
unsafe impl<T: Send> Send for Vec<T> {}

// SAFETY: a shared `&Vec<T>` permits both reading elements (needs `T: Sync`)
// and pushing new values from other threads (`push` takes `&self`, moving
// `T`s across threads — needs `T: Send`).
unsafe impl<T: Send + Sync> Sync for Vec<T> {}
163
impl Vec<()> {
    /// Returns a builder for configuring a vector with the given hard
    /// maximum capacity before constructing it.
    #[inline]
    #[must_use]
    pub const fn builder(max_capacity: usize) -> VecBuilder {
        VecBuilder::new(max_capacity)
    }
}
181
182impl<T> Vec<T> {
183 #[must_use]
200 #[track_caller]
201 pub fn new(max_capacity: usize) -> Self {
202 Vec {
203 inner: unsafe { RawVec::new(max_capacity) },
204 }
205 }
206
207 #[inline]
212 #[must_use]
213 pub const fn dangling() -> Self {
214 Vec {
215 inner: RawVec::dangling(),
216 }
217 }
218
219 #[inline]
221 #[must_use]
222 pub fn max_capacity(&self) -> usize {
223 self.inner.max_capacity()
224 }
225
226 #[inline]
230 #[must_use]
231 pub fn capacity(&self) -> usize {
232 self.inner.capacity()
233 }
234
235 #[inline]
242 #[must_use]
243 pub fn len(&self) -> usize {
244 self.inner.len()
245 }
246
247 #[inline]
251 #[must_use]
252 pub fn is_empty(&self) -> bool {
253 self.inner.is_empty()
254 }
255
256 #[inline]
258 #[track_caller]
259 pub fn push(&self, value: T) -> usize {
260 self.push_with(|_| value)
261 }
262
263 #[inline]
268 #[track_caller]
269 pub fn push_with(&self, f: impl FnOnce(usize) -> T) -> usize {
270 let (index, slot) = self.inner.push();
271
272 unsafe { &mut *slot.value.get() }.write(f(index));
274
275 unsafe { slot.occupied.store(true, Release) };
277
278 index
279 }
280
281 #[inline]
283 #[track_caller]
284 pub fn push_mut(&mut self, value: T) -> usize {
285 self.push_with_mut(|_| value)
286 }
287
288 #[inline]
293 #[track_caller]
294 pub fn push_with_mut(&mut self, f: impl FnOnce(usize) -> T) -> usize {
295 let (index, slot) = self.inner.push_mut();
296
297 slot.value.get_mut().write(f(index));
298
299 unsafe { *slot.occupied.get_mut() = true };
301
302 index
303 }
304
305 #[inline]
307 #[must_use]
308 pub fn get(&self, index: usize) -> Option<&T> {
309 self.inner.as_capacity().get(index)?.value()
310 }
311
312 #[inline]
314 #[must_use]
315 pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
316 self.inner.as_mut_capacity().get_mut(index)?.value_mut()
317 }
318
319 #[inline]
328 #[must_use]
329 pub unsafe fn get_unchecked(&self, index: usize) -> &T {
330 let slot = unsafe { self.inner.as_capacity().get_unchecked(index) };
332
333 let occupied = slot.occupied.load(Acquire);
334
335 assert_unsafe_precondition!(
336 occupied,
337 "`Vec::get_unchecked` requires that `index` refers to an initialized element",
338 );
339
340 unsafe { slot.value_unchecked() }
344 }
345
346 #[inline]
355 #[must_use]
356 pub unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
357 let slot = unsafe { self.inner.as_mut_capacity().get_unchecked_mut(index) };
359
360 assert_unsafe_precondition!(
361 *slot.occupied.get_mut(),
362 "`Vec::get_unchecked` requires that `index` refers to an initialized element",
363 );
364
365 unsafe { slot.value_unchecked_mut() }
367 }
368
369 #[inline]
371 pub fn iter(&self) -> Iter<'_, T> {
372 Iter::new(self)
373 }
374
375 #[inline]
377 pub fn iter_mut(&mut self) -> IterMut<'_, T> {
378 IterMut {
379 inner: self.inner.iter_mut(),
380 }
381 }
382
383 #[track_caller]
401 pub fn reserve(&self, additional: usize) {
402 self.inner.reserve(additional);
403 }
404
405 #[track_caller]
424 pub fn reserve_exact(&self, additional: usize) {
425 self.inner.reserve_exact(additional);
426 }
427
428 pub fn try_reserve(&self, additional: usize) -> Result<(), TryReserveError> {
447 self.inner.try_reserve(additional)
448 }
449
450 pub fn try_reserve_exact(&self, additional: usize) -> Result<(), TryReserveError> {
470 self.inner.try_reserve_exact(additional)
471 }
472}
473
// Identity `AsRef`, mirroring `std::vec::Vec`'s API surface.
impl<T> AsRef<Vec<T>> for Vec<T> {
    #[inline]
    fn as_ref(&self) -> &Vec<T> {
        self
    }
}

// Identity `AsMut`, mirroring `std::vec::Vec`'s API surface.
impl<T> AsMut<Vec<T>> for Vec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut Vec<T> {
        self
    }
}

// Debug-formats the occupied elements (delegates to the inner `RawVec`,
// which derefs to the slot slice).
impl<T: fmt::Debug> fmt::Debug for Vec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.inner, f)
    }
}

// Element-wise comparison via the inner slot slices.
impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for Vec<T> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        self.inner == other.inner
    }
}

// The following impls forward to `PartialEq<[U]> for Vec<T>` (and its
// mirror) by dereferencing the slice side.
impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        *self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        *self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &[T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &mut [T] {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == *other
    }
}
528
529impl<T: PartialEq<U>, U> PartialEq<[U]> for Vec<T> {
530 #[inline]
531 fn eq(&self, other: &[U]) -> bool {
532 let len = self.len();
533
534 if len != other.len() {
535 return false;
536 }
537
538 #[allow(clippy::needless_range_loop)]
539 for index in 0..len {
540 let Some(elem) = self.get(index) else {
541 return false;
542 };
543
544 if *elem != other[index] {
545 return false;
546 }
547 }
548
549 true
550 }
551}
552
553impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for [T] {
554 #[inline]
555 fn eq(&self, other: &Vec<U>) -> bool {
556 let len = other.len();
557
558 if len != self.len() {
559 return false;
560 }
561
562 #[allow(clippy::needless_range_loop)]
563 for index in 0..len {
564 let Some(elem) = other.get(index) else {
565 return false;
566 };
567
568 if self[index] != *elem {
569 return false;
570 }
571 }
572
573 true
574 }
575}
576
// Compares a copy-on-write slice against the vector (alloc-only, since
// `Cow` lives in `alloc`).
#[cfg(feature = "alloc")]
impl<T: PartialEq<U> + Clone, U> PartialEq<Vec<U>> for alloc::borrow::Cow<'_, [T]> {
    #[inline]
    fn eq(&self, other: &Vec<U>) -> bool {
        **self == *other
    }
}

// Array comparisons forward to the slice impl.
impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        *self == *other.as_slice()
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for Vec<T> {
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        *self == *other.as_slice()
    }
}

impl<T: Eq> Eq for Vec<T> {}
600
601impl<T> Extend<T> for Vec<T> {
602 #[inline]
603 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
604 for elem in iter {
605 self.push_mut(elem);
606 }
607 }
608}
609
610impl<'a, T: Copy> Extend<&'a T> for Vec<T> {
611 #[inline]
612 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
613 for &elem in iter {
614 self.push_mut(elem);
615 }
616 }
617}
618
// Hashes the slot slice (each slot hashes as `Option<&T>`), so two vectors
// with the same initialized elements hash alike.
impl<T: Hash> Hash for Vec<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&*self.inner, state);
    }
}
625
626impl<T> Index<usize> for Vec<T> {
627 type Output = T;
628
629 #[inline]
630 #[track_caller]
631 fn index(&self, index: usize) -> &Self::Output {
632 if let Some(value) = self.get(index) {
633 value
634 } else {
635 invalid_index(index)
636 }
637 }
638}
639
640impl<T> IndexMut<usize> for Vec<T> {
641 #[inline]
642 #[track_caller]
643 fn index_mut(&mut self, index: usize) -> &mut Self::Output {
644 if let Some(value) = self.get_mut(index) {
645 value
646 } else {
647 invalid_index(index)
648 }
649 }
650}
651
// Shared panic path for `Index`/`IndexMut`; kept out-of-line and `#[cold]`
// so the indexing happy path stays small.
#[cold]
#[inline(never)]
#[track_caller]
fn invalid_index(index: usize) -> ! {
    panic!("index {index} is out of bounds or not yet initialized");
}
658
// Borrowed iteration yields `&T` for initialized elements only.
impl<'a, T> IntoIterator for &'a Vec<T> {
    type Item = &'a T;

    type IntoIter = Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

// Mutably-borrowed iteration yields `&mut T` for initialized elements only.
impl<'a, T> IntoIterator for &'a mut Vec<T> {
    type Item = &'a mut T;

    type IntoIter = IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

// Owning iteration moves the initialized elements out of the vector.
impl<T> IntoIterator for Vec<T> {
    type Item = T;

    type IntoIter = IntoIter<T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}

// Ordering delegates to the inner slot slices (slots order as `Option<&T>`).
impl<T: PartialOrd> PartialOrd for Vec<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&self.inner, &other.inner)
    }
}

impl<T: Ord> Ord for Vec<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&self.inner, &other.inner)
    }
}
705
// One element cell. `occupied` is set to `true` (with `Release`) only after
// `value` has been fully written, so a reader that observes `occupied ==
// true` (with `Acquire`) may safely assume `value` is initialized.
struct Slot<T> {
    occupied: AtomicBool,
    value: UnsafeCell<MaybeUninit<T>>,
}

// `UnsafeCell` suppresses the auto impl; shared access only ever reads the
// value after the `occupied` handshake above, so unwind safety reduces to
// `T`'s own.
impl<T: RefUnwindSafe> RefUnwindSafe for Slot<T> {}
714
impl<T> Slot<T> {
    /// Returns the value if the slot has been published, `None` otherwise.
    #[inline(always)]
    fn value(&self) -> Option<&T> {
        // `Acquire` pairs with the `Release` store in `Vec::push_with`,
        // making the value write visible before we read it.
        if self.occupied.load(Acquire) {
            // SAFETY: `occupied == true` implies the value was initialized
            // before the flag was published.
            Some(unsafe { self.value_unchecked() })
        } else {
            None
        }
    }

    /// Returns the value mutably if the slot has been published.
    #[inline(always)]
    fn value_mut(&mut self) -> Option<&mut T> {
        // Exclusive access: a plain read of the flag suffices.
        if *self.occupied.get_mut() {
            // SAFETY: `occupied == true` implies the value is initialized.
            Some(unsafe { self.value_unchecked_mut() })
        } else {
            None
        }
    }

    /// Reads the value without checking `occupied`.
    ///
    /// # Safety
    ///
    /// The slot's value must be initialized.
    #[inline(always)]
    unsafe fn value_unchecked(&self) -> &T {
        // SAFETY: shared access to the cell is sound because published
        // values are never written again through `&self`.
        let value = unsafe { &*self.value.get() };

        // SAFETY: initialized per the caller's contract.
        unsafe { value.assume_init_ref() }
    }

    /// Mutably reads the value without checking `occupied`.
    ///
    /// # Safety
    ///
    /// The slot's value must be initialized.
    #[inline(always)]
    unsafe fn value_unchecked_mut(&mut self) -> &mut T {
        // SAFETY: initialized per the caller's contract; `&mut self` gives
        // exclusive access to the cell.
        unsafe { self.value.get_mut().assume_init_mut() }
    }
}
753
754impl<T: Clone> Clone for Slot<T> {
755 #[inline]
756 fn clone(&self) -> Self {
757 let value = self.value();
758
759 Slot {
760 occupied: AtomicBool::new(value.is_some()),
761 value: UnsafeCell::new(if let Some(value) = value {
762 MaybeUninit::new(value.clone())
763 } else {
764 MaybeUninit::uninit()
765 }),
766 }
767 }
768}
769
// Formats as `Some(value)` / `None` depending on publication state.
impl<T: fmt::Debug> fmt::Debug for Slot<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.value(), f)
    }
}

impl<T> Drop for Slot<T> {
    fn drop(&mut self) {
        // Only published slots hold an initialized value to drop.
        if *self.occupied.get_mut() {
            // SAFETY: `occupied == true` implies the value is initialized.
            unsafe { self.value.get_mut().assume_init_drop() };
        }
    }
}
785
786impl<T: PartialEq<U>, U> PartialEq<Slot<U>> for Slot<T> {
787 #[allow(clippy::match_same_arms)]
788 #[inline]
789 fn eq(&self, other: &Slot<U>) -> bool {
790 match (self.value(), other.value()) {
791 (Some(value), Some(other_value)) => *value == *other_value,
792 (Some(_), None) => false,
793 (None, Some(_)) => false,
794 (None, None) => true,
795 }
796 }
797}
798
impl<T: Eq> Eq for Slot<T> {}

// Hash/ordering treat the slot as its `Option<&T>` view, so unpublished
// slots compare consistently with `None`.
impl<T: Hash> Hash for Slot<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.value().hash(state);
    }
}

impl<T: PartialOrd> PartialOrd for Slot<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&self.value(), &other.value())
    }
}

impl<T: Ord> Ord for Slot<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&self.value(), &other.value())
    }
}
821
pub struct Iter<'a, T> {
    // Half-open cursor range over `Slot<T>`s, stored type-erased as
    // `*const ()` and cast back via `start()`/`end()`. Unoccupied slots in
    // the range are skipped during iteration.
    start: *const (),
    end: *const (),
    // Ties the borrow of the source `Vec` to the iterator's lifetime.
    marker: PhantomData<&'a T>,
}

// SAFETY: the iterator only ever produces `&T`, so both sending and sharing
// it are shared access to `T` — `T: Sync` covers both.
unsafe impl<T: Sync> Send for Iter<'_, T> {}

unsafe impl<T: Sync> Sync for Iter<'_, T> {}
838
impl<T> Iter<'_, T> {
    #[inline]
    fn new(vec: &Vec<T>) -> Self {
        let start = vec.inner.as_ptr();

        // `len` can transiently exceed `capacity` while a concurrent push is
        // waiting to grow, so clamp to the committed slot range.
        let len = cmp::min(vec.len(), vec.capacity());

        // SAFETY-relevant: `len` is within the committed capacity, so the
        // one-past-the-end pointer stays inside (or at the end of) the
        // slot allocation.
        let end = unsafe { start.add(len) };

        Iter {
            start: start.cast(),
            end: end.cast(),
            marker: PhantomData,
        }
    }

    // Typed view of the erased `start` cursor.
    #[inline]
    fn start(&self) -> *const Slot<T> {
        self.start.cast()
    }

    // Typed view of the erased `end` cursor.
    #[inline]
    fn end(&self) -> *const Slot<T> {
        self.end.cast()
    }

    // Number of slots (not necessarily occupied) remaining in the range.
    #[inline]
    fn len(&self) -> usize {
        // SAFETY: `start <= end` within one allocation by construction.
        unsafe { self.end().offset_from_unsigned(self.start()) }
    }

    fn as_slice(&self) -> &[Slot<T>] {
        unsafe { slice::from_raw_parts(self.start(), self.len()) }
    }
}
882
impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Adapter that renders only the occupied slots as a list.
        struct Elements<'a, T>(&'a [Slot<T>]);

        impl<T: fmt::Debug> fmt::Debug for Elements<'_, T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_list()
                    .entries(self.0.iter().filter_map(Slot::value))
                    .finish()
            }
        }

        f.debug_tuple("Iter")
            .field(&Elements(self.as_slice()))
            .finish()
    }
}
900
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Advance the front cursor, skipping slots whose value has not been
        // published yet.
        loop {
            if self.start == self.end {
                break None;
            }

            let start = self.start();

            // SAFETY: `start < end`, so `start + 1` stays within the range.
            self.start = unsafe { start.add(1) }.cast();

            // SAFETY: `start` was in range, so it points at a live slot.
            let slot = unsafe { &*start };

            if let Some(value) = slot.value() {
                break Some(value);
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Every remaining slot may be unoccupied, so the lower bound is 0.
        (0, Some(self.len()))
    }
}

impl<T> DoubleEndedIterator for Iter<'_, T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        // Same as `next`, but retreating the back cursor.
        loop {
            if self.start == self.end {
                break None;
            }

            // SAFETY: `start < end`, so `end - 1` stays within the range.
            let end = unsafe { self.end().sub(1) };

            self.end = end.cast();

            // SAFETY: `end` is now in range, so it points at a live slot.
            let slot = unsafe { &*end };

            if let Some(value) = slot.value() {
                break Some(value);
            }
        }
    }
}

// Once `start == end` the iterator keeps returning `None`.
impl<T> FusedIterator for Iter<'_, T> {}
955
pub struct IterMut<'a, T> {
    // Exclusive access lets us reuse the standard slice iterator over the
    // slots; unoccupied slots are filtered out in `next`/`next_back`.
    inner: slice::IterMut<'a, Slot<T>>,
}
964
965impl<'a, T> Iterator for IterMut<'a, T> {
966 type Item = &'a mut T;
967
968 #[inline]
969 fn next(&mut self) -> Option<Self::Item> {
970 loop {
971 let slot = self.inner.next()?;
972
973 if let Some(value) = slot.value_mut() {
974 break Some(value);
975 }
976 }
977 }
978
979 #[inline]
980 fn size_hint(&self) -> (usize, Option<usize>) {
981 (0, Some(self.inner.len()))
982 }
983}
984
985impl<T> DoubleEndedIterator for IterMut<'_, T> {
986 #[inline]
987 fn next_back(&mut self) -> Option<Self::Item> {
988 loop {
989 let slot = self.inner.next_back()?;
990
991 if let Some(value) = slot.value_mut() {
992 break Some(value);
993 }
994 }
995 }
996}
997
998impl<T> FusedIterator for IterMut<'_, T> {}
999
pub struct IntoIter<T> {
    // Half-open cursor range over `Slot<T>`s, type-erased like `Iter`'s.
    start: *mut (),
    end: *mut (),
    // Keeps the backing memory alive for the iterator's lifetime; freed when
    // the iterator is dropped.
    #[allow(dead_code)]
    allocation: Allocation,
    // The iterator owns `T`s, hence `PhantomData<T>` (not a reference).
    marker: PhantomData<T>,
}

// SAFETY: the iterator owns its elements; moving/sharing it moves/shares
// ownership of `T`s, mirroring a `std::vec::IntoIter`.
unsafe impl<T: Send> Send for IntoIter<T> {}

unsafe impl<T: Sync> Sync for IntoIter<T> {}
1018
impl<T> IntoIter<T> {
    #[inline]
    pub(super) fn new(vec: Vec<T>) -> Self {
        // Wrap in `ManuallyDrop` so the `RawVec`'s own `Drop` never runs;
        // this iterator takes over both element and allocation ownership.
        let mut vec = ManuallyDrop::new(vec.inner);

        // SAFETY: moves the allocation out by bitwise copy. Sound only
        // because the `ManuallyDrop` above guarantees the original copy is
        // never dropped — ownership is uniquely transferred here.
        let allocation = unsafe { ptr::read(&vec.inner.allocation) };

        let start = vec.as_mut_ptr();

        // `len` can transiently exceed `capacity` (see `RawVec::push`), so
        // clamp to the committed slot range.
        let len = cmp::min(vec.len_mut(), vec.capacity_mut());

        // SAFETY: `len` is within the committed capacity.
        let end = unsafe { start.add(len) };

        IntoIter {
            start: start.cast(),
            end: end.cast(),
            allocation,
            marker: PhantomData,
        }
    }

    // Typed view of the erased front cursor.
    #[inline]
    fn start(&self) -> *mut Slot<T> {
        self.start.cast()
    }

    // Typed view of the erased back cursor.
    #[inline]
    fn end(&self) -> *mut Slot<T> {
        self.end.cast()
    }

    // Number of slots (not necessarily occupied) remaining in the range.
    #[inline]
    fn len(&self) -> usize {
        // SAFETY: `start <= end` within one allocation by construction.
        unsafe { self.end().offset_from_unsigned(self.start()) }
    }

    fn as_slice(&self) -> &[Slot<T>] {
        unsafe { slice::from_raw_parts(self.start(), self.len()) }
    }
}
1068
impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Adapter that renders only the occupied slots as a list.
        struct Elements<'a, T>(&'a [Slot<T>]);

        impl<T: fmt::Debug> fmt::Debug for Elements<'_, T> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_list()
                    .entries(self.0.iter().filter_map(Slot::value))
                    .finish()
            }
        }

        f.debug_tuple("IntoIter")
            .field(&Elements(self.as_slice()))
            .finish()
    }
}

impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        // Drop only the slots not yet yielded: `next`/`next_back` move the
        // cursors past slots whose values were `ptr::read` out, so those are
        // excluded here. The `allocation` field then frees the memory when
        // it is dropped after this body.
        let elements = ptr::slice_from_raw_parts_mut(self.start(), self.len());

        // SAFETY: `start..end` covers live, not-yet-yielded slots.
        unsafe { elements.drop_in_place() };
    }
}
1096
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        // Advance the front cursor, skipping unpublished slots and moving
        // published values out by bitwise copy.
        loop {
            if self.start == self.end {
                break None;
            }

            let start = self.start();

            // SAFETY: `start < end`, so `start + 1` stays within the range.
            self.start = unsafe { start.add(1) }.cast();

            // SAFETY: `start` was in range, so it points at a live slot.
            let slot = unsafe { &*start };

            if let Some(value) = slot.value() {
                // SAFETY: the cursor has moved past this slot, so `Drop`
                // will not touch it again — this `read` uniquely takes
                // ownership of the value.
                break Some(unsafe { ptr::read(value) });
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Every remaining slot may be unoccupied, so the lower bound is 0.
        (0, Some(self.len()))
    }
}

impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        // Mirror of `next`, retreating the back cursor.
        loop {
            if self.start == self.end {
                break None;
            }

            // SAFETY: `start < end`, so `end - 1` stays within the range.
            let end = unsafe { self.end().sub(1) };

            self.end = end.cast();

            // SAFETY: `end` is now in range, so it points at a live slot.
            let slot = unsafe { &*end };

            if let Some(value) = slot.value() {
                // SAFETY: this slot is now outside `start..end`, so `Drop`
                // will not touch it again.
                break Some(unsafe { ptr::read(value) });
            }
        }
    }
}

// Once `start == end` the iterator keeps returning `None`.
impl<T> FusedIterator for IntoIter<T> {}
1155
pub struct RawVec<T> {
    // Type-erased implementation shared across all element types; the
    // element size/align are passed in explicitly at each call site.
    inner: RawVecInner,
    marker: PhantomData<T>,
}

struct RawVecInner {
    // First element, offset into `allocation` past any header and padding.
    elements: *mut (),
    // Committed capacity in elements; grows monotonically via CAS.
    capacity: AtomicUsize,
    // Claimed element count; may transiently exceed `capacity` while a
    // concurrent push is committing more memory (see `push`/`grow_one`).
    len: AtomicUsize,
    // Hard element-count ceiling the reservation was sized for.
    max_capacity: usize,
    growth_strategy: GrowthStrategy,
    // The reserved (partially committed) virtual-memory region.
    allocation: Allocation,
}
1180
impl RawVec<()> {
    /// Returns a builder for configuring a raw vector with the given hard
    /// maximum capacity before constructing it.
    #[inline]
    #[must_use]
    pub const fn builder(max_capacity: usize) -> raw::VecBuilder {
        raw::VecBuilder::new(max_capacity)
    }
}
1198
impl<T> RawVec<T> {
    /// Creates a raw vector bounded by `max_capacity` elements.
    ///
    /// # Safety
    ///
    /// Delegates to `raw::VecBuilder::build`, which is unsafe —
    /// TODO(review): the exact caller obligations are documented there;
    /// presumably the element type must tolerate access to uninitialized
    /// (zero-committed) memory, as `Slot<T>` does. Confirm.
    #[must_use]
    #[track_caller]
    pub unsafe fn new(max_capacity: usize) -> Self {
        unsafe { RawVec::builder(max_capacity).build() }
    }

    /// Creates a raw vector with no backing allocation and zero capacity.
    #[inline]
    #[must_use]
    pub const fn dangling() -> Self {
        RawVec {
            inner: RawVecInner::dangling(align_of::<T>()),
            marker: PhantomData,
        }
    }

    /// Returns the whole committed capacity as a slice, including elements
    /// beyond `len`.
    #[inline]
    #[must_use]
    pub fn as_capacity(&self) -> &[T] {
        // SAFETY-relevant: exposes `capacity` elements regardless of `len`;
        // committed-but-unwritten memory is zeroed pages, which callers'
        // element types must treat as valid — TODO(review): confirm this is
        // the intended contract.
        unsafe { slice::from_raw_parts(self.as_ptr(), self.capacity()) }
    }

    /// Mutable counterpart of [`RawVec::as_capacity`].
    #[inline]
    #[must_use]
    pub fn as_mut_capacity(&mut self) -> &mut [T] {
        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.capacity_mut()) }
    }

    /// Returns the initialized prefix (`min(len, capacity)` elements).
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // `len` can transiently exceed `capacity` (see `push`), so clamp.
        let len = cmp::min(self.len(), self.capacity());

        unsafe { slice::from_raw_parts(self.as_ptr(), len) }
    }

    /// Mutable counterpart of [`RawVec::as_slice`].
    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = cmp::min(self.len_mut(), self.capacity_mut());

        unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), len) }
    }

    /// Pointer to the first element.
    #[inline]
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.inner.elements.cast()
    }

    /// Mutable pointer to the first element.
    #[inline]
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.inner.elements.cast()
    }

    /// Hard upper bound on the number of elements.
    #[inline]
    #[must_use]
    pub fn max_capacity(&self) -> usize {
        self.inner.max_capacity
    }

    /// Currently committed capacity, in elements.
    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize {
        // `Acquire` pairs with the `Release` CAS in `grow_inner`, so the
        // newly committed memory is visible when a larger capacity is read.
        self.inner.capacity.load(Acquire)
    }

    // Capacity via exclusive access; no synchronization required.
    #[inline]
    fn capacity_mut(&mut self) -> usize {
        *self.inner.capacity.get_mut()
    }

    /// Number of claimed elements; may transiently exceed `capacity` while
    /// a concurrent push is still committing memory.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.inner.len.load(Relaxed)
    }

    // Length via exclusive access; no synchronization required.
    #[inline]
    fn len_mut(&mut self) -> usize {
        *self.inner.len.get_mut()
    }

    /// Returns `true` if no element has been claimed.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Claims the next index through a shared reference, returning the index
    /// and a reference to its (possibly uninitialized) element.
    ///
    /// # Panics
    ///
    /// Panics if the vector is at `max_capacity` or committing memory fails.
    #[inline]
    #[track_caller]
    pub fn push(&self) -> (usize, &T) {
        if T::IS_ZST {
            // ZSTs never need memory growth, but a plain `fetch_add` could
            // overshoot `max_capacity` with no grow step to catch it, so a
            // CAS loop enforces the bound directly.
            let mut len = self.inner.len.load(Relaxed);

            loop {
                if len == self.inner.max_capacity {
                    capacity_overflow();
                }

                match self
                    .inner
                    .len
                    .compare_exchange(len, len + 1, Relaxed, Relaxed)
                {
                    Ok(_) => {
                        // SAFETY: ZST reads never touch memory; any aligned
                        // pointer works.
                        let slot = unsafe { &*self.as_ptr() };

                        break (len, slot);
                    }
                    Err(new_len) => len = new_len,
                }
            }
        } else {
            // Claim an index first; this is what lets `len` transiently
            // exceed `capacity` until `grow_one` commits more memory (or
            // unwinds).
            let len = self.inner.len.fetch_add(1, Relaxed);

            if len >= self.inner.capacity.load(Acquire) {
                // SAFETY: element size matches `T` — TODO(review): confirm
                // against `RawVecInner::grow_one`'s contract.
                unsafe { self.grow_one(len) };
            }

            // SAFETY: after a successful grow, index `len` lies within the
            // committed capacity.
            let slot = unsafe { &*self.as_ptr().add(len) };

            (len, slot)
        }
    }

    // Out-of-line slow path so the `push` fast path stays small.
    #[cold]
    #[inline(never)]
    #[track_caller]
    unsafe fn grow_one(&self, len: usize) {
        unsafe { self.inner.grow_one(len, size_of::<T>()) }
    }

    /// Claims the next index through an exclusive reference.
    ///
    /// # Panics
    ///
    /// Panics if the vector is at `max_capacity` or committing memory fails.
    #[inline]
    #[track_caller]
    pub fn push_mut(&mut self) -> (usize, &mut T) {
        let len = self.len_mut();

        if len >= self.capacity_mut() {
            // SAFETY: element size matches `T`.
            unsafe { self.grow_one_mut() };
        }

        // Unlike `push`, `len` is bumped only after growth succeeds, so it
        // never overshoots under exclusive access.
        *self.inner.len.get_mut() = len + 1;

        // SAFETY: index `len` now lies within the committed capacity.
        let slot = unsafe { &mut *self.as_mut_ptr().add(len) };

        (len, slot)
    }

    // Out-of-line slow path so the `push_mut` fast path stays small.
    #[cold]
    #[inline(never)]
    #[track_caller]
    unsafe fn grow_one_mut(&mut self) {
        unsafe { self.inner.grow_one_mut(size_of::<T>()) }
    }

    /// Ensures capacity for at least `additional` more elements; panics on
    /// failure.
    #[track_caller]
    pub fn reserve(&self, additional: usize) {
        // SAFETY: element size matches `T`.
        unsafe { self.inner.reserve(additional, size_of::<T>()) };
    }

    /// Like [`RawVec::reserve`] but without growth-strategy rounding.
    #[track_caller]
    pub fn reserve_exact(&self, additional: usize) {
        // SAFETY: element size matches `T`.
        unsafe { self.inner.reserve_exact(additional, size_of::<T>()) };
    }

    /// Fallible version of [`RawVec::reserve`].
    ///
    /// # Errors
    ///
    /// Returns an error if the new capacity would exceed `max_capacity` or
    /// committing memory fails.
    pub fn try_reserve(&self, additional: usize) -> Result<(), TryReserveError> {
        // SAFETY: element size matches `T`.
        unsafe { self.inner.try_reserve(additional, size_of::<T>()) }
    }

    /// Fallible version of [`RawVec::reserve_exact`].
    ///
    /// # Errors
    ///
    /// Returns an error if the new capacity would exceed `max_capacity` or
    /// committing memory fails.
    pub fn try_reserve_exact(&self, additional: usize) -> Result<(), TryReserveError> {
        // SAFETY: element size matches `T`.
        unsafe { self.inner.try_reserve_exact(additional, size_of::<T>()) }
    }
}
1549
impl RawVecInner {
    /// Reserves address space for `max_capacity` elements (plus an optional
    /// header described by `header_layout`) and commits enough pages for
    /// `capacity` elements up front.
    ///
    /// # Safety
    ///
    /// Type-erased: `elem_size`/`elem_align` must describe the real element
    /// type used by the typed wrapper — TODO(review): confirm any further
    /// obligations from the (stripped) original docs.
    unsafe fn new(
        max_capacity: usize,
        capacity: usize,
        growth_strategy: GrowthStrategy,
        header_layout: Layout,
        elem_size: usize,
        elem_align: usize,
    ) -> Result<Self, TryReserveError> {
        // Total element bytes; overflow here means the request is absurd.
        let size = max_capacity
            .checked_mul(elem_size)
            .ok_or(CapacityOverflow)?;

        #[allow(clippy::cast_possible_wrap)]
        if size > isize::MAX as usize || capacity > max_capacity {
            return Err(CapacityOverflow.into());
        }

        // ZST elements with no header need no memory at all: a dangling
        // pointer and full capacity from the start.
        if size == 0 && header_layout.size() == 0 {
            let allocation = Allocation::dangling(elem_align);

            return Ok(RawVecInner {
                elements: allocation.ptr().cast(),
                capacity: AtomicUsize::new(max_capacity),
                len: AtomicUsize::new(0),
                max_capacity,
                growth_strategy,
                allocation,
            });
        }

        // Elements start after the header, padded to the element alignment.
        let elements_offset = align_up(header_layout.size(), elem_align);

        let size = elements_offset + size;

        // Reservations and commits are page-granular.
        let page_size = page_size();
        let size = align_up(size, page_size);

        if size == 0 {
            return Err(CapacityOverflow.into());
        }

        let align = cmp::max(header_layout.align(), elem_align);

        // Extra slack so the base pointer can be aligned up to `align` when
        // the OS only guarantees page alignment.
        let align_size = cmp::max(align, page_size) - page_size;

        let size = align_size.checked_add(size).ok_or(CapacityOverflow)?;

        let allocation = Allocation::new(size).map_err(AllocError)?;
        let aligned_ptr = allocation.ptr().map_addr(|addr| align_up(addr, align));

        // Commit only the pages needed for the requested initial capacity.
        let initial_size = align_up(elements_offset + capacity * elem_size, page_size);

        if initial_size != 0 {
            allocation
                .commit(aligned_ptr, initial_size)
                .map_err(AllocError)?;
        }

        let elements = aligned_ptr.wrapping_add(elements_offset).cast();

        // Page rounding may have committed room for more elements than
        // requested; record the real usable capacity.
        let capacity = if elem_size == 0 {
            max_capacity
        } else {
            (initial_size - elements_offset) / elem_size
        };

        Ok(RawVecInner {
            elements,
            capacity: AtomicUsize::new(capacity),
            len: AtomicUsize::new(0),
            max_capacity,
            growth_strategy,
            allocation,
        })
    }

    // Allocation-free placeholder with zero capacity.
    #[inline]
    const fn dangling(elem_align: usize) -> Self {
        let allocation = Allocation::dangling(elem_align);

        RawVecInner {
            elements: allocation.ptr().cast(),
            capacity: AtomicUsize::new(0),
            len: AtomicUsize::new(0),
            max_capacity: 0,
            growth_strategy: GrowthStrategy::new(),
            allocation,
        }
    }

    // Panicking reserve; the grow call is out-of-lined below.
    #[inline]
    #[track_caller]
    unsafe fn reserve(&self, additional: usize, elem_size: usize) {
        let len = self.len.load(Relaxed);

        if self.needs_to_grow(len, additional) {
            unsafe { self.reserve_slow(len, additional, elem_size) };
        }
    }

    #[cold]
    #[track_caller]
    unsafe fn reserve_slow(&self, len: usize, additional: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.grow(len, additional, elem_size) } {
            handle_error(err);
        }
    }

    #[track_caller]
    unsafe fn reserve_exact(&self, additional: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.try_reserve_exact(additional, elem_size) } {
            handle_error(err);
        }
    }

    unsafe fn try_reserve(
        &self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        let len = self.len.load(Relaxed);

        if self.needs_to_grow(len, additional) {
            unsafe { self.grow(len, additional, elem_size) }
        } else {
            Ok(())
        }
    }

    unsafe fn try_reserve_exact(
        &self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        let len = self.len.load(Relaxed);

        if self.needs_to_grow(len, additional) {
            unsafe { self.grow_exact(len, additional, elem_size) }
        } else {
            Ok(())
        }
    }

    #[inline]
    fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
        let capacity = self.capacity.load(Relaxed);
        // `len` can transiently exceed `capacity` during concurrent pushes;
        // clamp so the subtraction below cannot underflow.
        let len = cmp::min(len, capacity);

        additional > capacity - len
    }

    // Grow path for `push`: `len` was already claimed by `fetch_add`.
    #[track_caller]
    unsafe fn grow_one(&self, len: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.grow(len, 1, elem_size) } {
            if len >= self.max_capacity {
                // Undo the speculative `fetch_add` from `push` so `len`
                // doesn't stay permanently overshot past `max_capacity`.
                // NOTE(review): the undo only happens on capacity overflow,
                // not on allocation errors — confirm that is intentional
                // (e.g. because `handle_error` aborts for `AllocError`).
                self.len.fetch_sub(1, Relaxed);
            }

            handle_error(err);
        }
    }

    #[track_caller]
    unsafe fn grow_one_mut(&mut self, elem_size: usize) {
        let len = *self.len.get_mut();

        if let Err(err) = unsafe { self.grow(len, 1, elem_size) } {
            handle_error(err);
        }
    }

    // Growth-strategy-assisted grow.
    unsafe fn grow(
        &self,
        len: usize,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        unsafe { self.grow_inner(len, additional, false, elem_size) }
    }

    // Exact grow: no growth-strategy rounding (pages still round up).
    unsafe fn grow_exact(
        &self,
        len: usize,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        unsafe { self.grow_inner(len, additional, true, elem_size) }
    }

    unsafe fn grow_inner(
        &self,
        len: usize,
        additional: usize,
        exact: bool,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);

        // ZSTs reach here only when `max_capacity` elements are exhausted
        // (their capacity starts at `max_capacity`), so growing is always an
        // overflow.
        if elem_size == 0 {
            return Err(CapacityOverflow.into());
        }

        let required_capacity = len.checked_add(additional).ok_or(CapacityOverflow)?;

        if required_capacity > self.max_capacity {
            return Err(CapacityOverflow.into());
        }

        let old_capacity = self.capacity.load(Acquire);

        // Another thread may have already grown far enough.
        if old_capacity >= required_capacity {
            return Ok(());
        }

        let mut new_capacity = required_capacity;

        if !exact {
            new_capacity = cmp::max(new_capacity, self.growth_strategy.grow(old_capacity));
            new_capacity = cmp::min(new_capacity, self.max_capacity);
        }

        // Recover the header/padding offset baked in by `new`.
        let elements_offset = self.elements.addr() - self.allocation.ptr().addr();

        let page_size = page_size();

        let old_size = align_up(elements_offset + old_capacity * elem_size, page_size);
        let new_size = align_up(elements_offset + new_capacity * elem_size, page_size);

        // Commit only the pages beyond what is already committed. Committing
        // the same page range from two racing threads is presumably
        // idempotent — TODO(review): confirm `Allocation::commit`'s contract.
        let ptr = self.allocation.ptr().wrapping_add(old_size);
        let size = new_size - old_size;

        self.allocation.commit(ptr, size).map_err(AllocError)?;

        // Page rounding may have produced room for extra elements.
        let new_capacity = (new_size - elements_offset) / elem_size;
        let new_capacity = cmp::min(new_capacity, self.max_capacity);

        // `Release` publishes the freshly committed memory to readers that
        // `Acquire`-load the capacity. Losing the race is fine: the winner
        // must have installed a capacity at least as large.
        if let Err(capacity) =
            self.capacity
                .compare_exchange(old_capacity, new_capacity, Release, Acquire)
        {
            debug_assert!(capacity >= new_capacity);
        }

        Ok(())
    }
}
1816
// Identity `AsRef`/`AsMut`, mirroring `std::vec::Vec`'s API surface.
impl<T> AsRef<RawVec<T>> for RawVec<T> {
    #[inline]
    fn as_ref(&self) -> &RawVec<T> {
        self
    }
}

impl<T> AsMut<RawVec<T>> for RawVec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut RawVec<T> {
        self
    }
}

// Slice views go through `Deref`, i.e. the initialized prefix only.
impl<T> AsRef<[T]> for RawVec<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T> AsMut<[T]> for RawVec<T> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T> Borrow<[T]> for RawVec<T> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}

impl<T> BorrowMut<[T]> for RawVec<T> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}

// Debug-formats as the initialized element slice.
impl<T: fmt::Debug> fmt::Debug for RawVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}
1864
// Derefs to the initialized prefix (`min(len, capacity)` elements), which
// is what makes the slice-based trait impls below work.
impl<T> Deref for RawVec<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T> DerefMut for RawVec<T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
1882
impl<T> Drop for RawVec<T> {
    fn drop(&mut self) {
        // `len` may transiently exceed `capacity` (see `push`), so clamp:
        // only elements within committed memory exist and need dropping.
        // The backing memory itself is released when the `allocation` field
        // is dropped after this body.
        let len = cmp::min(self.len_mut(), self.capacity_mut());
        let elements = ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), len);

        // SAFETY: the first `len` elements lie within committed capacity.
        unsafe { elements.drop_in_place() };
    }
}
1893
// All comparisons below go through `Deref`, i.e. compare the initialized
// element prefixes as ordinary slices.
impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&[U]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &&[U]) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &&mut [U]) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for &[T] {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for &mut [T] {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U> PartialEq<[U]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U> PartialEq<RawVec<U>> for [T] {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        *self == **other
    }
}

#[cfg(feature = "alloc")]
impl<T: PartialEq<U> + Clone, U> PartialEq<RawVec<U>> for alloc::borrow::Cow<'_, [T]> {
    #[inline]
    fn eq(&self, other: &RawVec<U>) -> bool {
        **self == **other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        **self == *other
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for RawVec<T> {
    #[inline]
    fn eq(&self, other: &&[U; N]) -> bool {
        **self == **other
    }
}

impl<T: Eq> Eq for RawVec<T> {}
1966
1967impl<T> Extend<T> for RawVec<T> {
1968 #[inline]
1969 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
1970 for elem in iter {
1971 let (_, slot) = self.push_mut();
1972 *slot = elem;
1973 }
1974 }
1975}
1976
1977impl<'a, T: Copy> Extend<&'a T> for RawVec<T> {
1978 #[inline]
1979 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
1980 for &elem in iter {
1981 let (_, slot) = self.push_mut();
1982 *slot = elem;
1983 }
1984 }
1985}
1986
// Hashes the initialized element slice (via `Deref`).
impl<T: Hash> Hash for RawVec<T> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state);
    }
}
1993
// Indexing delegates to the initialized element slice, with the standard
// slice panic behavior on out-of-range indices.
impl<T, I: SliceIndex<[T]>> Index<I> for RawVec<T> {
    type Output = I::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}

impl<T, I: SliceIndex<[T]>> IndexMut<I> for RawVec<T> {
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}
2009
// Borrowed iteration uses plain slice iterators over the initialized prefix.
impl<'a, T> IntoIterator for &'a RawVec<T> {
    type Item = &'a T;

    type IntoIter = slice::Iter<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut RawVec<T> {
    type Item = &'a mut T;

    type IntoIter = slice::IterMut<'a, T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

// Owning iteration moves the elements out (see `raw::IntoIter`).
impl<T> IntoIterator for RawVec<T> {
    type Item = T;

    type IntoIter = raw::IntoIter<T>;

    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        raw::IntoIter::new(self)
    }
}
2042
// Ordering compares the initialized element slices lexicographically.
impl<T: PartialOrd> PartialOrd for RawVec<T> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

impl<T: Ord> Ord for RawVec<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}