1use self::TryReserveErrorKind::{AllocError, CapacityOverflow};
4use crate::{Allocation, Error, SizedTypeProperties, align_up, is_aligned, page_size};
5use core::{
6 alloc::Layout,
7 borrow::{Borrow, BorrowMut},
8 cmp, fmt,
9 hash::{Hash, Hasher},
10 iter::FusedIterator,
11 marker::PhantomData,
12 mem::ManuallyDrop,
13 ops::{Deref, DerefMut, Index, IndexMut},
14 panic::UnwindSafe,
15 ptr,
16 slice::{self, SliceIndex},
17};
18
/// Configuration builder for a [`Vec`]: maximum (reserved) capacity, the
/// initially committed capacity, the growth strategy, and an optional header
/// placed in front of the element storage.
#[derive(Clone, Copy, Debug)]
pub struct VecBuilder {
    // Hard upper bound on element count; `build` reserves address space for
    // this many elements up front.
    max_capacity: usize,
    // Elements worth of memory to commit immediately (defaults to 0).
    capacity: usize,
    // Policy for growing the committed capacity on demand.
    growth_strategy: GrowthStrategy,
    // Layout of a header stored ahead of the elements (zero-sized by default,
    // i.e. no header).
    header_layout: Layout,
}
31
32impl VecBuilder {
33 #[inline]
34 const fn new(max_capacity: usize) -> Self {
35 VecBuilder {
36 max_capacity,
37 capacity: 0,
38 growth_strategy: GrowthStrategy::new(),
39 header_layout: Layout::new::<()>(),
40 }
41 }
42
43 #[inline]
49 pub const fn capacity(&mut self, capacity: usize) -> &mut Self {
50 self.capacity = capacity;
51
52 self
53 }
54
55 #[inline]
61 #[track_caller]
62 pub const fn growth_strategy(&mut self, growth_strategy: GrowthStrategy) -> &mut Self {
63 growth_strategy.validate();
64
65 self.growth_strategy = growth_strategy;
66
67 self
68 }
69
70 #[inline]
80 #[track_caller]
81 pub const fn header(&mut self, header_layout: Layout) -> &mut Self {
82 assert!(is_aligned(header_layout.size(), header_layout.align()));
83
84 self.header_layout = header_layout;
85
86 self
87 }
88
89 #[must_use]
99 #[track_caller]
100 pub fn build<T>(&self) -> Vec<T> {
101 match self.try_build() {
102 Ok(vec) => vec,
103 Err(err) => handle_error(err),
104 }
105 }
106
107 pub fn try_build<T>(&self) -> Result<Vec<T>, TryReserveError> {
117 Ok(Vec {
118 inner: unsafe {
119 VecInner::new(
120 self.max_capacity,
121 self.capacity,
122 self.growth_strategy,
123 self.header_layout,
124 size_of::<T>(),
125 align_of::<T>(),
126 )
127 }?,
128 marker: PhantomData,
129 })
130 }
131}
132
/// A vector backed by a fixed virtual-memory reservation: the maximum
/// capacity is reserved at construction and memory is committed as needed,
/// so the element storage never moves.
pub struct Vec<T> {
    inner: VecInner,
    // Makes `Vec<T>` behave as an owner of `T`s for variance/drop purposes.
    marker: PhantomData<T>,
}

// Type-erased core shared by every `Vec<T>`; counts are in elements.
struct VecInner {
    // Start of element storage (past any header), aligned for the elements.
    elements: *mut (),
    // Number of elements the currently committed memory can hold.
    capacity: usize,
    // Number of initialized elements.
    len: usize,
    // Hard upper bound fixed at construction.
    max_capacity: usize,
    // Policy for growing `capacity` toward `max_capacity`.
    growth_strategy: GrowthStrategy,
    // Owns the reserved (and partially committed) memory region.
    allocation: Allocation,
}
156
// SAFETY: `Vec<T>` uniquely owns its elements, so it is `Send`/`Sync`
// exactly when `T` is, mirroring `std::vec::Vec`.
// NOTE(review): this also assumes `Allocation` is safe to send/share across
// threads — confirm.
unsafe impl<T: Send> Send for Vec<T> {}

unsafe impl<T: Sync> Sync for Vec<T> {}
166
impl Vec<()> {
    /// Returns a [`VecBuilder`] with the given maximum capacity.
    ///
    /// Defined on `Vec<()>` so the element type can be chosen later, at
    /// `build::<T>()` time.
    #[inline]
    #[must_use]
    pub const fn builder(max_capacity: usize) -> VecBuilder {
        VecBuilder::new(max_capacity)
    }
}
184
185impl<T> Vec<T> {
186 #[must_use]
203 #[track_caller]
204 pub fn new(max_capacity: usize) -> Self {
205 VecBuilder::new(max_capacity).build()
206 }
207
208 #[inline]
213 #[must_use]
214 pub const fn dangling() -> Self {
215 Vec {
216 inner: VecInner::dangling(align_of::<T>()),
217 marker: PhantomData,
218 }
219 }
220
221 #[inline]
223 #[must_use]
224 pub fn as_slice(&self) -> &[T] {
225 unsafe { slice::from_raw_parts(self.as_ptr(), self.len()) }
226 }
227
228 #[inline]
230 #[must_use]
231 pub fn as_mut_slice(&mut self) -> &mut [T] {
232 unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
233 }
234
235 #[inline]
241 #[must_use]
242 pub fn as_ptr(&self) -> *const T {
243 self.inner.elements.cast()
244 }
245
246 #[inline]
252 #[must_use]
253 pub fn as_mut_ptr(&mut self) -> *mut T {
254 self.inner.elements.cast()
255 }
256
257 #[inline]
259 #[must_use]
260 pub fn max_capacity(&self) -> usize {
261 self.inner.max_capacity
262 }
263
264 #[inline]
268 #[must_use]
269 pub fn capacity(&self) -> usize {
270 self.inner.capacity
271 }
272
273 #[inline]
275 #[must_use]
276 pub fn len(&self) -> usize {
277 self.inner.len
278 }
279
280 #[inline]
282 #[must_use]
283 pub fn is_empty(&self) -> bool {
284 self.len() == 0
285 }
286
287 #[inline]
289 #[track_caller]
290 pub fn push(&mut self, value: T) {
291 let len = self.len();
292
293 if len == self.capacity() {
294 self.grow_one();
295 }
296
297 unsafe { self.as_mut_ptr().add(len).write(value) };
299
300 unsafe { self.inner.len = len + 1 };
302 }
303
304 #[cold]
305 #[inline(never)]
306 #[track_caller]
307 fn grow_one(&mut self) {
308 unsafe { self.inner.grow_one(size_of::<T>()) };
309 }
310
311 #[track_caller]
329 pub fn reserve(&mut self, additional: usize) {
330 unsafe { self.inner.reserve(additional, size_of::<T>()) };
331 }
332
333 #[track_caller]
352 pub fn reserve_exact(&mut self, additional: usize) {
353 unsafe { self.inner.reserve_exact(additional, size_of::<T>()) };
354 }
355
356 pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
375 unsafe { self.inner.try_reserve(additional, size_of::<T>()) }
376 }
377
378 pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
398 unsafe { self.inner.try_reserve_exact(additional, size_of::<T>()) }
399 }
400}
401
impl VecInner {
    /// Reserves address space for `max_capacity` elements (behind an optional
    /// header) and commits memory for the first `capacity` of them.
    ///
    /// # Safety
    ///
    /// `elem_size` and `elem_align` must be the size and alignment of the
    /// element type that will be stored behind `elements`.
    unsafe fn new(
        max_capacity: usize,
        capacity: usize,
        growth_strategy: GrowthStrategy,
        header_layout: Layout,
        elem_size: usize,
        elem_align: usize,
    ) -> Result<Self, TryReserveError> {
        // Total bytes of element storage; overflow means the request is
        // unsatisfiable.
        let size = max_capacity
            .checked_mul(elem_size)
            .ok_or(CapacityOverflow)?;

        // Objects larger than isize::MAX bytes are not valid in Rust, and the
        // initial capacity may not exceed the maximum.
        #[allow(clippy::cast_possible_wrap)]
        if size > isize::MAX as usize || capacity > max_capacity {
            return Err(CapacityOverflow.into());
        }

        // ZST elements with no header need no memory at all: use a dangling
        // allocation and report the full capacity as immediately available.
        if size == 0 && header_layout.size() == 0 {
            let allocation = Allocation::dangling(elem_align);

            return Ok(VecInner {
                elements: allocation.ptr().cast(),
                capacity: max_capacity,
                len: 0,
                max_capacity,
                growth_strategy,
                allocation,
            });
        }

        // Elements start after the header, rounded up to their alignment.
        let elements_offset = align_up(header_layout.size(), elem_align);

        let size = elements_offset + size;

        // Reserve in whole pages.
        let page_size = page_size();
        let size = align_up(size, page_size);

        // NOTE(review): presumably this catches `align_up` wrapping to 0 when
        // the final page round-up overflows — confirm `align_up` semantics.
        if size == 0 {
            return Err(CapacityOverflow.into());
        }

        // Strictest alignment required by either the header or the elements.
        let align = cmp::max(header_layout.align(), elem_align);

        // Extra slack so the base pointer can be aligned up when `align`
        // exceeds the page size (assumes `Allocation` hands back page-aligned
        // memory — TODO confirm).
        let align_size = cmp::max(align, page_size) - page_size;

        let size = align_size.checked_add(size).ok_or(CapacityOverflow)?;

        // Reserve the full range; nothing is committed yet.
        let allocation = Allocation::new(size).map_err(AllocError)?;
        let aligned_ptr = allocation.ptr().map_addr(|addr| align_up(addr, align));

        // Bytes to commit up front for the requested initial capacity.
        let initial_size = align_up(elements_offset + capacity * elem_size, page_size);

        if initial_size != 0 {
            allocation
                .commit(aligned_ptr, initial_size)
                .map_err(AllocError)?;
        }

        let elements = aligned_ptr.wrapping_add(elements_offset).cast();

        // Rounding up to page granularity may have committed room for more
        // elements than requested; expose all of it as capacity.
        let capacity = if elem_size == 0 {
            max_capacity
        } else {
            (initial_size - elements_offset) / elem_size
        };

        Ok(VecInner {
            elements,
            capacity,
            len: 0,
            max_capacity,
            growth_strategy,
            allocation,
        })
    }

    // Placeholder inner with no usable memory and zero capacity.
    #[inline]
    const fn dangling(elem_align: usize) -> Self {
        let allocation = Allocation::dangling(elem_align);

        VecInner {
            elements: allocation.ptr().cast(),
            capacity: 0,
            len: 0,
            max_capacity: 0,
            growth_strategy: GrowthStrategy::new(),
            allocation,
        }
    }

    // Panicking reserve that grows by the configured strategy.
    // Caller contract: `elem_size` must match the stored element type.
    #[inline]
    #[track_caller]
    unsafe fn reserve(&mut self, additional: usize, elem_size: usize) {
        if self.needs_to_grow(additional) {
            unsafe { self.reserve_slow(additional, elem_size) };
        }
    }

    // Out-of-line slow path so `reserve`'s fast path inlines well.
    #[cold]
    #[track_caller]
    unsafe fn reserve_slow(&mut self, additional: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.grow(additional, elem_size) } {
            handle_error(err);
        }
    }

    // Panicking reserve without strategy-based over-allocation.
    #[track_caller]
    unsafe fn reserve_exact(&mut self, additional: usize, elem_size: usize) {
        if let Err(err) = unsafe { self.try_reserve_exact(additional, elem_size) } {
            handle_error(err);
        }
    }

    // Fallible reserve using the growth strategy.
    unsafe fn try_reserve(
        &mut self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(additional) {
            unsafe { self.grow(additional, elem_size) }
        } else {
            Ok(())
        }
    }

    // Fallible reserve committing only what is needed (page-rounded).
    unsafe fn try_reserve_exact(
        &mut self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        if self.needs_to_grow(additional) {
            unsafe { self.grow_exact(additional, elem_size) }
        } else {
            Ok(())
        }
    }

    // True if fewer than `additional` uncommitted-but-usable slots remain.
    #[inline]
    fn needs_to_grow(&self, additional: usize) -> bool {
        additional > self.capacity - self.len
    }

    // Grow for a single pushed element; panics on failure.
    #[track_caller]
    unsafe fn grow_one(&mut self, elem_size: usize) {
        if let Err(err) = unsafe { self.grow(1, elem_size) } {
            handle_error(err);
        }
    }

    unsafe fn grow(&mut self, additional: usize, elem_size: usize) -> Result<(), TryReserveError> {
        unsafe { self.grow_inner(additional, false, elem_size) }
    }

    unsafe fn grow_exact(
        &mut self,
        additional: usize,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        unsafe { self.grow_inner(additional, true, elem_size) }
    }

    // Commits enough additional pages for `additional` more elements;
    // `exact` skips the growth strategy's over-allocation.
    unsafe fn grow_inner(
        &mut self,
        additional: usize,
        exact: bool,
        elem_size: usize,
    ) -> Result<(), TryReserveError> {
        debug_assert!(additional > 0);

        // ZST vectors start with capacity == max_capacity, so reaching this
        // point means the request exceeds the maximum.
        if elem_size == 0 {
            return Err(CapacityOverflow.into());
        }

        let required_capacity = self.len.checked_add(additional).ok_or(CapacityOverflow)?;

        if required_capacity > self.max_capacity {
            return Err(CapacityOverflow.into());
        }

        let old_capacity = self.capacity;

        let mut new_capacity = required_capacity;

        if !exact {
            // Grow by the strategy (but at least the required amount), capped
            // at the fixed maximum.
            new_capacity = cmp::max(new_capacity, self.growth_strategy.grow(old_capacity));
            new_capacity = cmp::min(new_capacity, self.max_capacity);
        }

        // Distance from the allocation base to the element storage (header
        // plus any alignment slack added at construction).
        let elements_offset = self.elements.addr() - self.allocation.ptr().addr();

        let page_size = page_size();

        // NOTE(review): these sizes are measured from the allocation base,
        // while `new` commits from the aligned pointer; the two line up only
        // if the alignment slack is a whole number of pages — confirm.
        let old_size = align_up(elements_offset + old_capacity * elem_size, page_size);
        let new_size = align_up(elements_offset + new_capacity * elem_size, page_size);

        // Commit just the pages in [old_size, new_size).
        let ptr = self.allocation.ptr().wrapping_add(old_size);
        let size = new_size - old_size;

        self.allocation.commit(ptr, size).map_err(AllocError)?;

        // Page rounding may have committed room for a few extra elements.
        let new_capacity = (new_size - elements_offset) / elem_size;
        let new_capacity = cmp::min(new_capacity, self.max_capacity);

        self.capacity = new_capacity;

        Ok(())
    }
}
619
impl<T> AsRef<Vec<T>> for Vec<T> {
    /// Identity `AsRef`, mirroring `std::vec::Vec`.
    #[inline]
    fn as_ref(&self) -> &Vec<T> {
        self
    }
}

impl<T> AsMut<Vec<T>> for Vec<T> {
    /// Identity `AsMut`, mirroring `std::vec::Vec`.
    #[inline]
    fn as_mut(&mut self) -> &mut Vec<T> {
        self
    }
}

impl<T> AsRef<[T]> for Vec<T> {
    /// Views the vector as a slice of its elements (via `Deref`).
    #[inline]
    fn as_ref(&self) -> &[T] {
        self
    }
}

impl<T> AsMut<[T]> for Vec<T> {
    /// Views the vector as a mutable slice of its elements (via `DerefMut`).
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self
    }
}

impl<T> Borrow<[T]> for Vec<T> {
    /// Borrows the vector as a slice, consistent with `Eq`/`Ord`/`Hash`.
    #[inline]
    fn borrow(&self) -> &[T] {
        self
    }
}

impl<T> BorrowMut<[T]> for Vec<T> {
    /// Mutably borrows the vector as a slice.
    #[inline]
    fn borrow_mut(&mut self) -> &mut [T] {
        self
    }
}
661
impl<T: fmt::Debug> fmt::Debug for Vec<T> {
    /// Formats the vector like a slice of its elements.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<T> Deref for Vec<T> {
    type Target = [T];

    /// Dereferences to the slice of initialized elements.
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T> DerefMut for Vec<T> {
    /// Mutably dereferences to the slice of initialized elements.
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}
683
impl<T> Drop for Vec<T> {
    fn drop(&mut self) {
        // Drop the initialized elements in place; the backing memory itself
        // is released when `self.inner.allocation` is dropped afterwards.
        let elements = ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len());

        // SAFETY: the first `len` elements are initialized and uniquely
        // owned by this vector, which is being destroyed.
        unsafe { elements.drop_in_place() };
    }
}
693
694impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for Vec<T> {
695 #[inline]
696 fn eq(&self, other: &Vec<U>) -> bool {
697 **self == **other
698 }
699}
700
701impl<T: PartialEq<U>, U> PartialEq<&[U]> for Vec<T> {
702 #[inline]
703 fn eq(&self, other: &&[U]) -> bool {
704 **self == **other
705 }
706}
707
708impl<T: PartialEq<U>, U> PartialEq<&mut [U]> for Vec<T> {
709 #[inline]
710 fn eq(&self, other: &&mut [U]) -> bool {
711 **self == **other
712 }
713}
714
715impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &[T] {
716 #[inline]
717 fn eq(&self, other: &Vec<U>) -> bool {
718 **self == **other
719 }
720}
721
722impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for &mut [T] {
723 #[inline]
724 fn eq(&self, other: &Vec<U>) -> bool {
725 **self == **other
726 }
727}
728
729impl<T: PartialEq<U>, U> PartialEq<[U]> for Vec<T> {
730 #[inline]
731 fn eq(&self, other: &[U]) -> bool {
732 **self == *other
733 }
734}
735
736impl<T: PartialEq<U>, U> PartialEq<Vec<U>> for [T] {
737 #[inline]
738 fn eq(&self, other: &Vec<U>) -> bool {
739 *self == **other
740 }
741}
742
743#[cfg(feature = "alloc")]
744impl<T: PartialEq<U> + Clone, U> PartialEq<Vec<U>> for alloc::borrow::Cow<'_, [T]> {
745 #[inline]
746 fn eq(&self, other: &Vec<U>) -> bool {
747 **self == **other
748 }
749}
750
751impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for Vec<T> {
752 #[inline]
753 fn eq(&self, other: &[U; N]) -> bool {
754 **self == *other
755 }
756}
757
758impl<T: PartialEq<U>, U, const N: usize> PartialEq<&[U; N]> for Vec<T> {
759 #[inline]
760 fn eq(&self, other: &&[U; N]) -> bool {
761 **self == **other
762 }
763}
764
765impl<T: Eq> Eq for Vec<T> {}
766
767impl<T> Extend<T> for Vec<T> {
768 #[inline]
769 fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
770 for elem in iter {
771 self.push(elem);
772 }
773 }
774}
775
776impl<'a, T: Copy> Extend<&'a T> for Vec<T> {
777 #[inline]
778 fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
779 for &elem in iter {
780 self.push(elem);
781 }
782 }
783}
784
impl<T: Hash> Hash for Vec<T> {
    /// Hashes like the equivalent slice, consistent with `Borrow<[T]>`.
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&**self, state);
    }
}

impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
    type Output = I::Output;

    /// Indexes into the element slice; panics on out of bounds, like slices.
    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        Index::index(&**self, index)
    }
}

impl<T, I: SliceIndex<[T]>> IndexMut<I> for Vec<T> {
    /// Mutable indexing into the element slice.
    #[inline]
    fn index_mut(&mut self, index: I) -> &mut Self::Output {
        IndexMut::index_mut(&mut **self, index)
    }
}
807
impl<'a, T> IntoIterator for &'a Vec<T> {
    type Item = &'a T;

    type IntoIter = slice::Iter<'a, T>;

    /// Iterates over shared references to the elements.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, T> IntoIterator for &'a mut Vec<T> {
    type Item = &'a mut T;

    type IntoIter = slice::IterMut<'a, T>;

    /// Iterates over mutable references to the elements.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<T> IntoIterator for Vec<T> {
    type Item = T;

    type IntoIter = IntoIter<T>;

    /// Consumes the vector, yielding its elements by value.
    #[inline]
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self)
    }
}
840
impl<T: PartialOrd> PartialOrd for Vec<T> {
    /// Lexicographic comparison via the element slices.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
        PartialOrd::partial_cmp(&**self, &**other)
    }
}

impl<T: Ord> Ord for Vec<T> {
    /// Lexicographic total order via the element slices.
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        Ord::cmp(&**self, &**other)
    }
}
854
/// Owning iterator returned by `Vec<T>`'s `IntoIterator` impl.
pub struct IntoIter<T> {
    // First not-yet-yielded element; advanced by `next`.
    start: *const T,
    // One past the last remaining element. For ZSTs the remaining length is
    // instead encoded as `end.addr() - start.addr()`.
    end: *const T,
    // Kept alive so the element memory stays valid; freed when the iterator
    // is dropped.
    #[allow(dead_code)]
    allocation: Allocation,
    marker: PhantomData<T>,
}

// SAFETY: the iterator uniquely owns its remaining elements, so it is
// `Send`/`Sync` exactly when `T` is.
// NOTE(review): also assumes `Allocation` may cross/share threads — confirm.
unsafe impl<T: Send> Send for IntoIter<T> {}

unsafe impl<T: Sync> Sync for IntoIter<T> {}

// Raw pointer fields suppress the auto impl; the iterator keeps a consistent
// state across unwinds, so mirror `T`'s unwind safety.
impl<T: UnwindSafe> UnwindSafe for IntoIter<T> {}
876
impl<T> IntoIter<T> {
    /// Consumes `vec`, taking ownership of its allocation and elements.
    #[inline]
    fn new(vec: Vec<T>) -> Self {
        // Suppress `vec`'s Drop: ownership of both the allocation and the
        // elements moves into the iterator.
        let mut vec = ManuallyDrop::new(vec);

        // SAFETY: `vec` is wrapped in ManuallyDrop and never used as a whole
        // again, so moving the allocation out by value cannot double-free.
        let allocation = unsafe { ptr::read(&vec.inner.allocation) };

        let start = vec.as_mut_ptr();

        let end = if T::IS_ZST {
            // ZSTs: encode the element count in the pointer's address.
            start.cast::<u8>().wrapping_add(vec.len()).cast::<T>()
        } else {
            // SAFETY: `len` initialized elements live at `start`, so
            // `start + len` stays within the same allocation.
            unsafe { start.add(vec.len()) }
        };

        IntoIter {
            start,
            end,
            allocation,
            marker: PhantomData,
        }
    }

    /// Returns the remaining (not-yet-yielded) elements as a slice.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        // SAFETY: `start..start + len()` are initialized, owned elements.
        unsafe { slice::from_raw_parts(self.start, self.len()) }
    }

    /// Returns the remaining elements as a mutable slice.
    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        // SAFETY: as above, plus `&mut self` guarantees exclusive access.
        unsafe { slice::from_raw_parts_mut(self.start.cast_mut(), self.len()) }
    }
}
918
impl<T> AsRef<[T]> for IntoIter<T> {
    /// Views the remaining elements as a slice.
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> AsMut<[T]> for IntoIter<T> {
    /// Views the remaining elements as a mutable slice.
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        self.as_mut_slice()
    }
}

impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
    /// Formats the remaining (not-yet-yielded) elements.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}
938
impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        // Drop only the elements that were never yielded; the backing memory
        // is released when `self.allocation` is dropped afterwards.
        let elements = ptr::slice_from_raw_parts_mut(self.start.cast_mut(), self.len());

        // SAFETY: `start..start + len()` are initialized elements uniquely
        // owned by this iterator, which is being destroyed.
        unsafe { elements.drop_in_place() };
    }
}
948
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ptr = if T::IS_ZST {
            // ZSTs: the remaining count lives in `end`'s address, so shrink
            // it by one byte; `start` stays put (all ZST values are alike).
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast::<T>();

            self.start
        } else {
            let old = self.start;

            // SAFETY: start != end, so at least one element remains and
            // `old + 1` stays within the same allocation.
            self.start = unsafe { old.add(1) };

            old
        };

        // SAFETY: `ptr` addresses an initialized element that is logically
        // moved out here and never touched again by this iterator.
        Some(unsafe { ptr.read() })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: the remaining count is always known.
        let len = self.len();

        (len, Some(len))
    }

    #[inline]
    fn count(self) -> usize {
        // Dropping `self` disposes of the remaining elements; the count is
        // exact, so no iteration is needed.
        self.len()
    }
}
988
impl<T> DoubleEndedIterator for IntoIter<T> {
    #[inline]
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ptr = if T::IS_ZST {
            // ZSTs are indistinguishable, so taking from either end is the
            // same operation: just decrement the count encoded in `end`.
            self.end = self.end.cast::<u8>().wrapping_sub(1).cast::<T>();

            self.start
        } else {
            // SAFETY: start != end, so `end - 1` points at the last
            // remaining initialized element, within the same allocation.
            self.end = unsafe { self.end.sub(1) };

            self.end
        };

        // SAFETY: `ptr` addresses an initialized element that is logically
        // moved out here and never touched again by this iterator.
        Some(unsafe { ptr.read() })
    }
}
1012
impl<T> ExactSizeIterator for IntoIter<T> {
    #[inline]
    fn len(&self) -> usize {
        if T::IS_ZST {
            // Remaining count is stored directly in the address difference.
            self.end.addr().wrapping_sub(self.start.addr())
        } else {
            // SAFETY: `start` and `end` lie within the same allocation with
            // start <= end, and `T` is not zero-sized here.
            unsafe { self.end.offset_from_unsigned(self.start) }
        }
    }
}

// `next` keeps returning `None` once start == end, so fusing is sound.
impl<T> FusedIterator for IntoIter<T> {}
1030
/// Policy deciding how a [`Vec`]'s committed capacity grows when it runs out
/// of room.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[non_exhaustive]
pub enum GrowthStrategy {
    /// Multiply the old capacity by `numerator / denominator`.
    /// Must satisfy `numerator > denominator` and `denominator != 0`.
    Exponential {
        numerator: usize,

        denominator: usize,
    },

    /// Add a fixed, non-zero number of elements each time.
    Linear {
        elements: usize,
    },
}

impl Default for GrowthStrategy {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}

impl GrowthStrategy {
    /// The default strategy: double the capacity (`Exponential` with
    /// `numerator: 2, denominator: 1`).
    #[inline]
    #[must_use]
    pub const fn new() -> Self {
        GrowthStrategy::Exponential {
            numerator: 2,
            denominator: 1,
        }
    }

    /// Asserts the strategy's invariants; panics if they do not hold.
    #[track_caller]
    pub(crate) const fn validate(&self) {
        match *self {
            GrowthStrategy::Exponential {
                numerator,
                denominator,
            } => {
                assert!(numerator > denominator);
                assert!(denominator != 0);
            }
            GrowthStrategy::Linear { elements } => assert!(elements != 0),
        }
    }

    /// Computes the grown capacity for `old_capacity`, saturating at
    /// `usize::MAX` (the caller clamps to the vector's maximum).
    pub(crate) fn grow(&self, old_capacity: usize) -> usize {
        match *self {
            Self::Exponential {
                numerator,
                denominator,
            } => saturating_mul_div(old_capacity, numerator, denominator),
            Self::Linear { elements } => old_capacity.saturating_add(elements),
        }
    }
}

// Unsigned integer with twice the bits of `usize`, wide enough to hold any
// product of two `usize` values without overflow.
#[cfg(target_pointer_width = "64")]
type DoubleUsize = u128;
#[cfg(target_pointer_width = "32")]
type DoubleUsize = u64;
#[cfg(target_pointer_width = "16")]
type DoubleUsize = u32;

// Computes `val * numerator / denominator` in double-width arithmetic (so the
// product cannot overflow) and clamps the result to `usize::MAX`.
fn saturating_mul_div(val: usize, numerator: usize, denominator: usize) -> usize {
    let wide = val as DoubleUsize * numerator as DoubleUsize;

    usize::try_from(wide / denominator as DoubleUsize).unwrap_or(usize::MAX)
}
1122
// Diverges with the panic appropriate for `err`; `#[track_caller]` reports
// the original call site (e.g. the `push`/`reserve` that failed).
#[cold]
#[track_caller]
pub(crate) fn handle_error(err: TryReserveError) -> ! {
    match err.kind {
        CapacityOverflow => capacity_overflow(),
        AllocError(err) => handle_alloc_error(err),
    }
}

// Panics with the same message std uses for capacity overflow.
#[inline(never)]
#[track_caller]
pub(crate) fn capacity_overflow() -> ! {
    panic!("capacity overflow");
}

// Panics, including the OS error that caused the allocation failure.
#[allow(clippy::needless_pass_by_value)]
#[cold]
#[inline(never)]
#[track_caller]
pub(crate) fn handle_alloc_error(err: Error) -> ! {
    panic!("allocation failed: {err}");
}
1146
/// Error returned by the `try_reserve` family and [`VecBuilder::try_build`]
/// when a capacity request cannot be satisfied.
#[derive(Debug)]
pub struct TryReserveError {
    pub(crate) kind: TryReserveErrorKind,
}

impl From<TryReserveErrorKind> for TryReserveError {
    #[inline]
    fn from(kind: TryReserveErrorKind) -> Self {
        TryReserveError { kind }
    }
}

/// Internal detail of [`TryReserveError`], kept private so the public type
/// can evolve.
#[derive(Debug)]
pub(crate) enum TryReserveErrorKind {
    /// The requested capacity exceeded the collection's maximum, or a size
    /// computation overflowed.
    CapacityOverflow,
    /// The operating system reported a reservation/commit failure.
    AllocError(Error),
}
1168
impl fmt::Display for TryReserveError {
    /// Human-readable description of why the reservation failed.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.kind {
            CapacityOverflow => f.write_str(
                "memory allocation failed because the computed capacity exceeded the collection's \
                maximum",
            ),
            AllocError(_) => f.write_str(
                "memory allocation failed because the operating system returned an error",
            ),
        }
    }
}
1182
impl core::error::Error for TryReserveError {
    /// Exposes the underlying OS error, when there is one.
    fn source(&self) -> Option<&(dyn core::error::Error + 'static)> {
        match &self.kind {
            TryReserveErrorKind::CapacityOverflow => None,
            TryReserveErrorKind::AllocError(err) => Some(err),
        }
    }
}